@angel-estrada7

# Import dependencies
import os
import requests
import pandas as pd
import json
from dotenv import load_dotenv
from etherscan_py import etherscan_py
import plotly.express as px
import matplotlib.pyplot as plt
import hvplot.pandas
import numpy as np
import datetime as dt
import seaborn as sns
from pathlib import Path
%matplotlib inline
# Load the .env file so the API keys below are available via os.getenv()
load_dotenv()
True
# Covalent API key pulled from the environment (None if the variable is unset)
api_key = os.getenv('COVALENT_API_KEY')
# Notebook-style sanity check of the key's type (no effect outside a REPL)
type(api_key)
str
# NOTE(review): etherscan_py is already imported at the top of the file;
# this re-import is redundant but harmless
from etherscan_py import etherscan_py
etherscan_api = etherscan_py.Client(os.getenv('ETHERSCAN_API'))
# Fetch the current ETH/USD price (used later to convert wei fees to USD)
eth_value = etherscan_api.get_eth_price()
eth_value
2693.29
# Base url and path fragments for the Covalent API (each fragment starts
# with '/' so they can be concatenated directly)
url = "https://api.covalenthq.com/v1"
chain_id = "/1"
azuki_address = "/0xED5AF388653567Af2F388E6224dC7C4b3241C544"
cryptopunks_address = "/0xb47e3cd837dDF8e4c57F05d70Ab865de6e193BBB"
BAYC_address = "/0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D"
# Query-string presets: a date-bounded window and a 1000-row transactions page
date_option = '/?quote-currency=USD&format=JSON&from=2017-01-01&to=2022-05-01'
page_option = '/transactions_v2/?quote-currency=USD&format=JSON&block-signed-at-asc=false&no-logs=false&page-number=0&page-size=1000'
# Key suffixes: api_option appends to an existing query string ('&'),
# api_no_option starts a new one ('/?')
api_option = "&key=" + api_key
api_no_option = '/?key=' + api_key
# Build the historical market-data URL for the BAYC collection
BAYC_historical_url = url + chain_id + "/nft_market/collection" + BAYC_address + api_no_option
# Get request
BAYC_historical_json = requests.get(BAYC_historical_url).json()
# Convert historical json data to a dataframe and view data
BAYC_df = pd.DataFrame(BAYC_historical_json['data']['items'])
# Index by opening date so the series can be sorted chronologically
BAYC_df = BAYC_df.set_index('opening_date')
# Keep only daily USD volume and daily unique-token sale counts
BAYC_vol_df = pd.DataFrame(BAYC_df, columns = ['volume_quote_day', 'unique_token_ids_sold_count_day']).sort_index()
BAYC_vol_df.head()
| volume_quote_day | unique_token_ids_sold_count_day | |
|---|---|---|
| opening_date | ||
| 2021-04-30 | 8.241964e+02 | 1 |
| 2021-05-01 | 1.737182e+06 | 1635 |
| 2021-05-02 | 4.950946e+06 | 1534 |
| 2021-05-03 | 3.948996e+06 | 996 |
| 2021-05-04 | 1.388962e+06 | 336 |
# Plot Volume quote per day (cast to int so the bar chart axis is numeric)
BAYC_volume = BAYC_vol_df['volume_quote_day'].astype(int)
# BAYC_volume.plot.bar(figsize = (20,4))
px.bar(BAYC_volume)
# Querying the API for the most recent 1000 BAYC transactions
BAYC_tx_url = url + chain_id + "/address" + BAYC_address + page_option + api_option
BAYC_tx = requests.get(BAYC_tx_url).json()
# Keep exchange label, gas fees (wei) and USD value, indexed by block timestamp
BAYC_tx_df = pd.DataFrame(BAYC_tx['data']['items'], columns = ['to_address_label','fees_paid', 'value_quote','block_signed_at']).set_index('block_signed_at').sort_index()
BAYC_tx_df.head()
| to_address_label | fees_paid | value_quote | |
|---|---|---|---|
| block_signed_at | |||
| 2022-05-03T04:58:00Z | None | 3165483669662600 | 0.0 |
| 2022-05-03T05:03:17Z | None | 1487241858437776 | 0.0 |
| 2022-05-03T05:05:46Z | None | 63010705583674080 | 0.0 |
| 2022-05-03T05:17:24Z | None | 2254442804107520 | 0.0 |
| 2022-05-03T05:18:16Z | None | 2407384820533160 | 0.0 |
# Keep only transactions that moved value, then restrict to rows with a
# known exchange label and no remaining missing fields.
nonzero_mask = BAYC_tx_df['value_quote'] != 0
BAYC_sales_df = BAYC_tx_df[nonzero_mask]
labeled_mask = BAYC_sales_df['to_address_label'].notnull()
BAYC_sales = BAYC_sales_df[labeled_mask].dropna()
# Bar chart of USD sale value per exchange for the filtered BAYC sales
BAYC_fig = px.bar(BAYC_sales,
x='to_address_label',
y= 'value_quote',
color='value_quote',
height=1020,
width = 1000,
barmode = 'overlay',
labels={'value_quote':'Amount in USD', 'to_address_label': 'Exchange'},
# Fix: title typo 'Bore Apes' -> 'Bored Apes'
title='USD spent to buy Bored Apes in recent 1000 transactions'
)
BAYC_fig.show()
# Convert gas fees from wei to USD (fees_paid is wei: divide by 10**18 for
# ETH, then multiply by the ETH/USD price fetched earlier)
BAYC_fees = BAYC_sales_df['fees_paid'].astype(int)/10**18*eth_value
BAYC_fees.plot.bar(rot = 90, figsize = (20,5), ylabel = 'value in USD')
<AxesSubplot:xlabel='block_signed_at', ylabel='value in USD'>
@mmsaki

# Build the historical market-data URL for the Azuki collection
azuki_url = url + chain_id + "/nft_market/collection" + azuki_address + api_no_option
# Get request
azuki_historical_json = requests.get(azuki_url).json()
# Convert historical json data to a dataframe and view data
azuki_df = pd.DataFrame(azuki_historical_json['data']['items'])
# Index by opening date so the series can be sorted chronologically
azuki_df = azuki_df.set_index('opening_date')
# Keep only daily USD volume and daily unique-token sale counts
azuki_vol_df = pd.DataFrame(azuki_df, columns = ['volume_quote_day', 'unique_token_ids_sold_count_day']).sort_index()
azuki_vol_df.head()
| volume_quote_day | unique_token_ids_sold_count_day | |
|---|---|---|
| opening_date | ||
| 2022-01-12 | 45941404.0 | 2402 |
| 2022-01-13 | 25129178.0 | 1318 |
| 2022-01-14 | 168151840.0 | 470 |
| 2022-01-15 | 4408686.0 | 499 |
| 2022-01-16 | 295638336.0 | 368 |
# Plot Volume quote per day (cast to int so the bar chart axis is numeric)
azuki_volume = azuki_vol_df['volume_quote_day'].astype(int)
# Plot Historical daily volume
px.bar(azuki_volume)
# Querying the API for the most recent 1000 Azuki transactions
azuki_tx_url = url + chain_id + "/address" + azuki_address + page_option + api_option
azuki_tx = requests.get(azuki_tx_url).json()
# Keep exchange label, gas fees (wei) and USD value, indexed by block timestamp
azuki_tx_df = pd.DataFrame(azuki_tx['data']['items'], columns = ['to_address_label','fees_paid', 'value_quote','block_signed_at']).set_index('block_signed_at').sort_index()
azuki_tx_df.head()
| to_address_label | fees_paid | value_quote | |
|---|---|---|---|
| block_signed_at | |||
| 2022-05-03T05:10:50Z | None | 4127333930314595 | 0.000000 |
| 2022-05-03T05:18:16Z | Wyvern Exchange Contract (-) | 10140874476295150 | 90667.575461 |
| 2022-05-03T05:18:42Z | None | 2570300864764061 | 0.000000 |
| 2022-05-03T05:23:58Z | None | 37351159788905740 | 0.000000 |
| 2022-05-03T05:26:13Z | Gem: GemSwap 2 | 11788059244049616 | 88137.185303 |
# Filter to value-bearing transactions, then keep rows with a known exchange
# label. .dropna() added for consistency with the BAYC and cryptopunks cells,
# which both drop rows that still contain any NaN.
azuki_sales_df = azuki_tx_df[azuki_tx_df['value_quote'] != 0]
azuki_sales = azuki_sales_df[azuki_sales_df['to_address_label'].notnull()].dropna()
# Bar chart of USD sale value per exchange for the filtered Azuki sales
azuki_fig = px.bar(azuki_sales,
x='to_address_label',
y= 'value_quote',
color='value_quote',
height=1020,
width = 1000,
barmode='overlay',
labels={'value_quote':'Amount in USD', 'to_address_label': 'Exchange'},
title='USD spent to buy Azukis in recent 1000 transactions'
)
azuki_fig.show()
# Convert gas fees from wei to USD (wei / 10**18 * current ETH/USD price)
azuki_fees = azuki_sales_df['fees_paid'].astype(int)/10**18*eth_value
azuki_fees.plot.bar(rot = 90, figsize = (20,5), ylabel = 'value in USD')
<AxesSubplot:xlabel='block_signed_at', ylabel='value in USD'>
# Build the historical market-data URL for the CryptoPunks collection
cryptopunks_historical_url = url + chain_id + "/nft_market/collection" + cryptopunks_address + api_no_option
# Get request
cryptopunks_historical_json = requests.get(cryptopunks_historical_url).json()
# Convert historical json data to a dataframe and view data
cryptopunks_df = pd.DataFrame(cryptopunks_historical_json['data']['items'])
# Index by opening date so the series can be sorted chronologically
cryptopunks_df = cryptopunks_df.set_index('opening_date')
# Keep only daily USD volume and daily unique-token sale counts
cryptopunks_vol_df = pd.DataFrame(cryptopunks_df, columns = ['volume_quote_day', 'unique_token_ids_sold_count_day']).sort_index()
cryptopunks_vol_df.head()
| volume_quote_day | unique_token_ids_sold_count_day | |
|---|---|---|
| opening_date | ||
| 2017-06-23 | 0.0 | 19 |
| 2017-06-24 | 0.0 | 22 |
| 2017-06-25 | 0.0 | 11 |
| 2017-06-26 | 0.0 | 18 |
| 2017-06-27 | 0.0 | 35 |
# Plot Volume quote per day (cast to int so the bar chart axis is numeric)
cryptopunks_volume = cryptopunks_vol_df['volume_quote_day'].astype(int)
# cryptopunks_volume.plot.line(figsize = (20,4))
px.bar(cryptopunks_volume)
# Querying the API for the most recent 1000 CryptoPunks transactions
cryptopunks_tx_url = url + chain_id + "/address" + cryptopunks_address + page_option + api_option
cryptopunks_tx = requests.get(cryptopunks_tx_url).json()
# Keep exchange label, gas fees (wei) and USD value, indexed by block timestamp
cryptopunks_tx_df = pd.DataFrame(cryptopunks_tx['data']['items'], columns = ['to_address_label','fees_paid', 'value_quote','block_signed_at']).set_index('block_signed_at').sort_index()
cryptopunks_tx_df.head()
| to_address_label | fees_paid | value_quote | |
|---|---|---|---|
| block_signed_at | |||
| 2022-04-28T02:07:46Z | CRYPTOPUNKS (Ͼ) | 8059300000000000 | 179579.760742 |
| 2022-04-28T02:25:04Z | CRYPTOPUNKS (Ͼ) | 2233845322427811 | 0.000000 |
| 2022-04-28T02:28:31Z | CRYPTOPUNKS (Ͼ) | 1553815767995790 | 0.000000 |
| 2022-04-28T02:29:01Z | CRYPTOPUNKS (Ͼ) | 1833778000000000 | 0.000000 |
| 2022-04-28T02:29:56Z | None | 19024222924419000 | 0.000000 |
# NOTE(review): this cell is an exact duplicate of the earlier CryptoPunks
# historical-data cell — it re-issues the same API request and rebuilds the
# same dataframes. Consider deleting it to save an API call.
cryptopunks_historical_url = url + chain_id + "/nft_market/collection" + cryptopunks_address + api_no_option
# Get request
cryptopunks_historical_json = requests.get(cryptopunks_historical_url).json()
# Convert historical json data to a dataframe and view data
cryptopunks_df = pd.DataFrame(cryptopunks_historical_json['data']['items'])
# Set index to date
cryptopunks_df = cryptopunks_df.set_index('opening_date')
# Create Volume dataframe
cryptopunks_vol_df = pd.DataFrame(cryptopunks_df, columns = ['volume_quote_day', 'unique_token_ids_sold_count_day']).sort_index()
cryptopunks_vol_df.head()
| volume_quote_day | unique_token_ids_sold_count_day | |
|---|---|---|
| opening_date | ||
| 2017-06-23 | 0.0 | 19 |
| 2017-06-24 | 0.0 | 22 |
| 2017-06-25 | 0.0 | 11 |
| 2017-06-26 | 0.0 | 18 |
| 2017-06-27 | 0.0 | 35 |
# Value-bearing transactions only, then rows with a known exchange label
# and no remaining missing fields.
has_value = cryptopunks_tx_df['value_quote'] != 0
cryptopunks_sales_df = cryptopunks_tx_df[has_value]
has_label = cryptopunks_sales_df['to_address_label'].notnull()
cryptopunks_sales = cryptopunks_sales_df[has_label].dropna()
# Bar chart of USD sale value per exchange for the filtered CryptoPunks sales
cryptopunks_fig = px.bar(cryptopunks_sales,
x='to_address_label',
y= 'value_quote',
color='value_quote',
height=1020,
width = 1000,
barmode = 'overlay',
labels={'value_quote':'Amount in USD', 'to_address_label': 'Exchange'},
title='USD spent to buy Cryptopunks in recent 1000 transactions'
)
cryptopunks_fig.show()
# Convert gas fees from wei to USD (wei / 10**18 * current ETH/USD price)
cryptopunks_fees = cryptopunks_sales_df['fees_paid'].astype(int)/10**18*eth_value
# plot
cryptopunks_fees.plot.bar(rot = 90, figsize = (20,5), ylabel = 'value in USD')
<AxesSubplot:xlabel='block_signed_at', ylabel='value in USD'>
# Total USD sale value per exchange for each collection
azuki_total = azuki_sales.groupby('to_address_label').sum()
cryptopunks_total = cryptopunks_sales.groupby('to_address_label').sum()
BAYC_total = BAYC_sales.groupby('to_address_label').sum()
# Side-by-side totals; exchanges absent from a collection become NaN
combined_totals = pd.concat([azuki_total,cryptopunks_total,BAYC_total], axis=1)
combined_totals.columns = ['azuki_total', 'cryptopunks_total','BAYC_total']
# Display the combined table
combined_totals
| azuki_total | cryptopunks_total | BAYC_total | |
|---|---|---|---|
| to_address_label | |||
| Gem: GemSwap 2 | 9.033323e+05 | 3.683260e+05 | 2.568193e+06 |
| LooksRare: Exchange | 2.030164e+06 | NaN | 9.019504e+06 |
| Wyvern Exchange Contract (-) | 7.022424e+06 | NaN | 2.870285e+07 |
| CRYPTOPUNKS (Ͼ) | NaN | 2.800161e+07 | NaN |
# Grouped bar chart of total USD sale value per exchange, per collection
combined_total_fig = px.bar(combined_totals, height = 700)
# Show Figure
combined_total_fig.show()
# Convert each collection's sale gas fees from wei to USD
azuki_usd_fees = azuki_sales['fees_paid'].astype(int)/10**18*eth_value
cryptopunks_usd_fees = cryptopunks_sales['fees_paid'].astype(int)/10**18*eth_value
BAYC_usd_fees = BAYC_sales['fees_paid'].astype(int)/10**18*eth_value
# Align positionally (timestamps dropped) and drop rows beyond the shortest
# series. NOTE(review): this pairs fees by row order, not by time.
combined_usd_fees = pd.concat([azuki_usd_fees.reset_index(drop=True),
cryptopunks_usd_fees.reset_index(drop=True),
BAYC_usd_fees.reset_index(drop=True)],
axis=1
).dropna()
combined_usd_fees.columns = ['azuki_fees', 'cryptopunks_fees','BAYC_fees']
# Violin plot comparing fee distributions across the three collections
combined_fees_fig = px.violin(combined_usd_fees, height = 700, title = "Value in USD for fees paid in NFT Sales")
# Show Figure
combined_fees_fig.show()
@dockingbay24

# create variable for api key for etherscan
ETHERSCAN_API_KEY = os.getenv("ETHERSCAN_API")
# Etherscan getsourcecode call: fetches the verified contract source for
# the CryptoPunks contract
cryptopunks_contract = "0xb47e3cd837dDF8e4c57F05d70Ab865de6e193BBB"
etherscan_url = "https://api.etherscan.io/api"
module = "?module=contract"
action = "&action=getsourcecode"
address ="&address=" + cryptopunks_contract
etherscan_key = "&apikey=" + ETHERSCAN_API_KEY
# Set API call string
url_cryptopunks_contract_details = etherscan_url+module+action+address+etherscan_key
# Get results from API call
cryptopunk_contract_details= requests.get(url_cryptopunks_contract_details).json()
# Etherscan getstatus call: checks the execution status of one transaction
transaction_hash = "0x15f8e5ea1079d9a0bb04a4c58ae5fe7654b5b2b4463375ff7ffb490aa0032f3a" #replace with trans_hash
etherscan_url = "https://api.etherscan.io/api"
module = "?module=transaction"
action = "&action=getstatus"
# Fix: single '&' — the original '&&txhash=' produced a malformed query
# string with an empty parameter
address ="&txhash=" + transaction_hash
etherscan_key = "&apikey=" + ETHERSCAN_API_KEY
# Set API call string
url_cryptopunks_transactions = etherscan_url+module+action+address+etherscan_key
# Get results from API call
cryptopunk_transaction=requests.get(url_cryptopunks_transactions).json()
# Covalent NFT-market call for the Wrapped Cryptopunks contract
url = "https://api.covalenthq.com/v1"
chain_id = "/1" #TEMP is it always chain1 for most part?
option = "/nft_market/collection"
# Add search queries to api url
contract_address = "/0xb7f7f6c52f2e2fdb1963eab30438024864c313f6" #Do we want to compare other contracts
currency = "/?quote-currency=USD"
format_output = "&format=JSON"
date_from ="&from=2022-01-25"
date_to = "&to=2022-04-25"
covalent_api_key = "&key=" + api_key
# Set API call string (the original assigned this identical URL twice;
# one assignment is sufficient)
url_nft_market_cap_detail = url + chain_id + option + contract_address + currency + format_output + date_from + date_to + covalent_api_key
#get results from API call
nft_market_cap = requests.get(url_nft_market_cap_detail).json()
#set data into a dataframe
nft_market_cap_df = pd.DataFrame(nft_market_cap['data']['items'])
#display head of df (display() is the IPython rich-output helper)
display(nft_market_cap_df.head())
| chain_id | collection_name | collection_address | collection_ticker_symbol | opening_date | volume_wei_day | volume_quote_day | average_volume_wei_day | average_volume_quote_day | unique_token_ids_sold_count_day | ... | fourth_nft_image_token_id | fourth_nft_image | fourth_nft_image_256 | fourth_nft_image_512 | fourth_nft_image_1024 | fifth_nft_image_token_id | fifth_nft_image | fifth_nft_image_256 | fifth_nft_image_512 | fifth_nft_image_1024 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1 | Wrapped Cryptopunks | 0xb7f7f6c52f2e2fdb1963eab30438024864c313f6 | WPUNKS | 2022-03-30 | 201500000000000000000 | 687428.9000 | 201500000000000000000 | 687428.9000 | 1 | ... | 51 | https://wrappedpunks.com:3000/images/punks/51.png | https://image-proxy.svc.prod.covalenthq.com/25... | https://image-proxy.svc.prod.covalenthq.com/51... | https://image-proxy.svc.prod.covalenthq.com/10... | 60 | https://wrappedpunks.com:3000/images/punks/60.png | https://image-proxy.svc.prod.covalenthq.com/25... | https://image-proxy.svc.prod.covalenthq.com/51... | https://image-proxy.svc.prod.covalenthq.com/10... |
| 1 | 1 | Wrapped Cryptopunks | 0xb7f7f6c52f2e2fdb1963eab30438024864c313f6 | WPUNKS | 2022-02-03 | 250000000000000000 | 664.4058 | 250000000000000000 | 664.4058 | 1 | ... | 51 | https://wrappedpunks.com:3000/images/punks/51.png | https://image-proxy.svc.prod.covalenthq.com/25... | https://image-proxy.svc.prod.covalenthq.com/51... | https://image-proxy.svc.prod.covalenthq.com/10... | 60 | https://wrappedpunks.com:3000/images/punks/60.png | https://image-proxy.svc.prod.covalenthq.com/25... | https://image-proxy.svc.prod.covalenthq.com/51... | https://image-proxy.svc.prod.covalenthq.com/10... |
2 rows × 39 columns
#TEMP describe numeric columns of the market-cap dataframe
nft_market_cap_df.describe()
| chain_id | volume_quote_day | average_volume_quote_day | unique_token_ids_sold_count_day | floor_price_quote_7d | gas_quote_rate_day | |
|---|---|---|---|---|---|---|
| count | 2.0 | 2.000000 | 2.000000 | 2.0 | 2.000000 | 2.000000 |
| mean | 1.0 | 344046.652900 | 344046.652900 | 1.0 | 344046.652900 | 3034.590450 |
| std | 0.0 | 485615.830927 | 485615.830927 | 0.0 | 485615.830927 | 533.112056 |
| min | 1.0 | 664.405800 | 664.405800 | 1.0 | 664.405800 | 2657.623300 |
| 25% | 1.0 | 172355.529350 | 172355.529350 | 1.0 | 172355.529350 | 2846.106875 |
| 50% | 1.0 | 344046.652900 | 344046.652900 | 1.0 | 344046.652900 | 3034.590450 |
| 75% | 1.0 | 515737.776450 | 515737.776450 | 1.0 | 515737.776450 | 3223.074025 |
| max | 1.0 | 687428.900000 | 687428.900000 | 1.0 | 687428.900000 | 3411.557600 |
#TEMP list all columns of the market-cap dataframe to pick which to keep
nft_market_cap_df.columns
Index(['chain_id', 'collection_name', 'collection_address',
'collection_ticker_symbol', 'opening_date', 'volume_wei_day',
'volume_quote_day', 'average_volume_wei_day',
'average_volume_quote_day', 'unique_token_ids_sold_count_day',
'floor_price_wei_7d', 'floor_price_quote_7d', 'gas_quote_rate_day',
'quote_currency', 'first_nft_image_token_id', 'first_nft_image',
'first_nft_image_256', 'first_nft_image_512', 'first_nft_image_1024',
'second_nft_image_token_id', 'second_nft_image', 'second_nft_image_256',
'second_nft_image_512', 'second_nft_image_1024',
'third_nft_image_token_id', 'third_nft_image', 'third_nft_image_256',
'third_nft_image_512', 'third_nft_image_1024',
'fourth_nft_image_token_id', 'fourth_nft_image', 'fourth_nft_image_256',
'fourth_nft_image_512', 'fourth_nft_image_1024',
'fifth_nft_image_token_id', 'fifth_nft_image', 'fifth_nft_image_256',
'fifth_nft_image_512', 'fifth_nft_image_1024'],
dtype='object')
# Create a new data frame for graphing volume, keeping only date and volume
market_cap_df_graph = nft_market_cap_df[["opening_date","volume_quote_day"]].copy()
# TEMP display market_cap_df_graph
market_cap_df_graph
| opening_date | volume_quote_day | |
|---|---|---|
| 0 | 2022-03-30 | 687428.9000 |
| 1 | 2022-02-03 | 664.4058 |
# Graph dataframe for analysis; NumeralTickFormatter gives the y axis
# thousands separators instead of scientific notation
from bokeh.models.formatters import NumeralTickFormatter
formatter = NumeralTickFormatter(format="0,0")
market_cap_df_graph.hvplot.bar(
x='opening_date',
y='volume_quote_day',
xlabel='Opening Date',
ylabel='Volume',
rot=90,
title='Volume Quote Per Day - 0xb7f7f6c52f2e2fdb1963eab30438024864c313f6',
height= 600,
width = 2000
).opts(
yformatter=formatter
)
# Covalent NFT-market call for the CryptoPunks contract (same window as the
# Wrapped Cryptopunks call above, for comparison)
url = "https://api.covalenthq.com/v1"
chain_id = "/1" #TEMP is it always chain1 for most part?
option = "/nft_market/collection"
# Add search queries to api url
contract_address2 = "/0xb47e3cd837dDF8e4c57F05d70Ab865de6e193BBB"
currency = "/?quote-currency=USD"
format_output = "&format=JSON"
date_from ="&from=2022-01-25"
date_to = "&to=2022-04-25"
covalent_api_key = "&key=" + api_key
# Set API call string (deduplicated: the original assigned the identical
# URL twice and built the identical DataFrame twice)
url_nft_market_cap_detail2 = url + chain_id + option + contract_address2 + currency + format_output + date_from + date_to + covalent_api_key
#get results from API call
nft_market_cap2 = requests.get(url_nft_market_cap_detail2).json()
#set data into a dataframe
nft_market_cap_df2 = pd.DataFrame(nft_market_cap2['data']['items'])
#create a new data frame for graphing volume, drop un-needed columns
market_cap_df_graph_2 = nft_market_cap_df2[["opening_date","volume_quote_day"]].copy()
# Graph dataframe for analysis; thousands-separator formatting on the y axis
from bokeh.models.formatters import NumeralTickFormatter
formatter = NumeralTickFormatter(format="0,0")
market_cap_df_graph_2.hvplot.bar(
x='opening_date',
y='volume_quote_day',
xlabel='Opening Date',
ylabel='Volume',
rot=90,
title='Volume Quote Per Day - Cryptopunks',
height= 600,
width = 1000
).opts(
yformatter=formatter
)
# Build one bar chart per contract, then overlay them with the hvplot
# '*' operator (graph2 * graph1 below)
from bokeh.models.formatters import NumeralTickFormatter
formatter = NumeralTickFormatter(format="0,0")
# CryptoPunks volume
graph2 = market_cap_df_graph_2.hvplot.bar(
x='opening_date',
y='volume_quote_day',
xlabel='Opening Date',
ylabel='Volume',
rot=90,
label='0xb47e3cd837dDF8e4c57F05d70Ab865de6e193BBB',
height= 600,
width = 1000
).opts(
yformatter=formatter
)
# Wrapped Cryptopunks volume
graph1 = market_cap_df_graph.hvplot.bar(
x='opening_date',
y='volume_quote_day',
xlabel='Opening Date',
ylabel='Volume',
rot=90,
label='0xb7f7f6c52f2e2fdb1963eab30438024864c313f6',
height= 600,
width = 1000
).opts(
yformatter=formatter
)
graph2 * graph1
# Tag each frame with its contract address so a combined plot can group by it
market_cap_df_graph['Token'] = '0xb7f7f6c52f2e2fdb1963eab30438024864c313f6'
market_cap_df_graph_2['Token'] = '0xb47e3cd837dDF8e4c57F05d70Ab865de6e193BBB'
# combine dataframes into a single df (rows stacked, original indices kept)
combined_df = pd.concat([market_cap_df_graph, market_cap_df_graph_2], join="outer", ignore_index=False)
# Combined Token graph: scatter of daily volume, colored by contract address
from bokeh.models.formatters import NumeralTickFormatter
formatter = NumeralTickFormatter(format="0,0")
combined_df.hvplot.scatter(
x='opening_date',
y='volume_quote_day',
xlabel='Date',
ylabel='Volume',
rot=90,
label='Combined Analysis',
by='Token',
attr_labels=False,
height= 600,
width = 1000
).opts(
yformatter=formatter
)
# Read in all cryptopunk owners
cryptopunk_owners_path = Path("./Resources_punks/2022-05_all_cryptopunk_owners.csv")
# Read in top20 sales, by ether value
top20_sales_path = Path("./Resources_punks/top20_sales_by_ether_value.csv")
# Load into dataframes. NOTE(review): infer_datetime_format is deprecated in
# recent pandas — confirm the installed version still accepts it
cryptopunk_owners_df = pd.read_csv(cryptopunk_owners_path, index_col="#", parse_dates=True, infer_datetime_format=True)
top20_sales_df = pd.read_csv(top20_sales_path, index_col="Punk", parse_dates=True, infer_datetime_format=True)
# Display the first rows of each dataframe
display(cryptopunk_owners_df.head())
display(top20_sales_df.head())
| Account | OpenSea / ENS | Number Owned | last Active | |
|---|---|---|---|---|
| # | ||||
| 1 | 0xb7f7f6c52f2e2 | WrappedCryptoPu | 428 | 7 hours ago |
| 2 | 0xa858ddc0445d8 | NaN | 423 | 1 month ago |
| 3 | 0xa25803ab86a32 | wilcox.eth | 238 | 28 days ago |
| 4 | 0xb88f61e6fbda8 | NaN | 215 | 11 months ago |
| 5 | 0x577ebc5de943e | NaN | 165 | 5 days ago |
| Ether | EtherValueUSD_M | Date | |
|---|---|---|---|
| Punk | |||
| 5822 | 8000 | 23.70 | 02/12/22 |
| 7804 | 4200 | 7.57 | 03/11/21 |
| 3100 | 4200 | 7.58 | 03/11/22 |
| 5577 | 2500 | 7.70 | 02/09/22 |
| 4156 | 2500 | 10.26 | 12/09/21 |
# Scatter the top-20 sales: USD value (millions) vs ETH price, one color per punk
top20_sales_df.hvplot.scatter(
x='EtherValueUSD_M',
y='Ether',
xlabel='Ether value in USD Millions',
ylabel='Ether',
rot=90,
label='Top 20 Sales By Ether',
by='Punk',
height= 600,
width = 1000
).opts(
bgcolor='lightgray',
#fontsize={'title': 16, 'labels': 14, 'xticks': 6, 'yticks': 12}
)
# Render the top-20 sales as a table (hvplot.table; the x/y/by options are
# presumably ignored by the table renderer — TODO confirm)
top20_sales_df.hvplot.table(
x='EtherValueUSD_M',
y='Ether',
xlabel='Ether value in USD Millions',
ylabel='Ether',
rot=90,
label='Top 20 Sales By Ether',
by='Punk',
height= 600,
width = 1000
).opts(
bgcolor='lightgray',
#fontsize={'title': 16, 'labels': 14, 'xticks': 6, 'yticks': 12}
)
# Validate the dataframe: total owned should be 10,000 (full CryptoPunks supply)
cryptopunk_total_assets = cryptopunk_owners_df['Number Owned'].sum()
cryptopunk_total_assets
10000
# Mean number of NFTs owned per owner
cryptopunk_owners_mean = cryptopunk_owners_df['Number Owned'].mean()
cryptopunk_owners_mean
2.914602156805596
# Top-20 asset owners (the owners CSV is already sorted by Number Owned)
top20_cryptopunk_owners = cryptopunk_owners_df.head(20)
top20_cryptopunk_owners
| Account | OpenSea / ENS | Number Owned | last Active | |
|---|---|---|---|---|
| # | ||||
| 1 | 0xb7f7f6c52f2e2 | WrappedCryptoPu | 428 | 7 hours ago |
| 2 | 0xa858ddc0445d8 | NaN | 423 | 1 month ago |
| 3 | 0xa25803ab86a32 | wilcox.eth | 238 | 28 days ago |
| 4 | 0xb88f61e6fbda8 | NaN | 215 | 11 months ago |
| 5 | 0x577ebc5de943e | NaN | 165 | 5 days ago |
| 6 | 0x69021ae876958 | sov.eth | 146 | 6 months ago |
| 7 | 0x26f744711ee9e | NaN | 141 | 4 years ago |
| 8 | 0x4084df8bf74ba | NaN | 98 | NaN |
| 9 | 0x269616d549d7e | NaN | 96 | 9 days ago |
| 10 | 0x31a5ff62a1b2c | NaN | 93 | 1 month ago |
| 11 | 0x7174039818a41 | NaN | 89 | 3 years ago |
| 12 | 0xcc7c335f3365a | NaN | 87 | 13 days ago |
| 13 | 0x51688cd36c188 | NaN | 79 | 6 days ago |
| 14 | 0x810fdbc7e5cfe | NaN | 77 | 13 hours ago |
| 15 | 0xf5a4ba515dd36 | NaN | 75 | 1 month ago |
| 16 | 0xcffc336e6d019 | NaN | 74 | 2 months ago |
| 17 | 0x6f4a2d3a4f47f | NaN | 70 | 9 days ago |
| 18 | 0x062c5432107e3 | NaN | 68 | 3 months ago |
| 19 | 0x7760e0243ca9b | NaN | 66 | 3 years ago |
| 20 | 0xdde8df9a7dc9f | Kenney | 66 | 2 months ago |
# Share of the total supply held by the 20 largest owners
top20_owners_assets = cryptopunk_owners_df['Number Owned'].iloc[:20].sum()
perc_top20_owners_assets = 100 * top20_owners_assets / cryptopunk_total_assets
print(f"The Top 20 owners own {top20_owners_assets} NFTs, which is {perc_top20_owners_assets:.2f}% of total assets.")
The Top 20 owners own 2794 NFTs, which is 27.94% of total assets.
# Share of the total supply held by the 100 largest owners
top100_owners_assets = cryptopunk_owners_df['Number Owned'].iloc[:100].sum()
perc_top100_owners_assets = 100 * top100_owners_assets / cryptopunk_total_assets
print(f"The Top 100 owners own {top100_owners_assets} NFTs, which is {perc_top100_owners_assets:.2f}% of total assets.")
The Top 100 owners own 4705 NFTs, which is 47.05% of total assets.
# Total amount of assets owned by the BOTTOM 20 owners (the original comment
# incorrectly said top20)
bot20_owners_assets = cryptopunk_owners_df['Number Owned'].tail(20).sum()
perc_bot20_owners_assets = bot20_owners_assets /cryptopunk_total_assets *100
print(f"The Bottom 20 owners own {bot20_owners_assets} NFTs, which is {perc_bot20_owners_assets:.2f}% of total assets.")
The Bottom 20 owners own 20 NFTs, which is 0.20% of total assets.